Introduction

In this study we tested whether free-flying honey bees that are habituated to even composite numbers spontaneously discriminate an odd prime number from an odd composite number. This document provides details of the data processing, statistical analyses and figures in the original manuscript submitted for peer review.

The following code is written in the R programming language.

EE = Equal element size condition, SA = Equal surface area condition

Libraries

Install packages

# One-time setup: install the packages this document depends on.
# Guarded with requireNamespace() so packages that are already installed
# are not re-downloaded every time the document is run.
pkgs <- c(
  "lme4",        # For fitting models (glmer)
  "ggplot2",     # For plotting
  "wesanderson", # Color palette
  "dplyr",       # Data processing
  "tidyverse",   # QoL
  "ggpubr",      # Create figure
  "patchwork",   # Create figure
  "DHARMa",      # For assumption checking
  "performance", # For assumption checking
  "lmerTest"     # p-values for mixed models
)
for (pkg in pkgs) {
  if (!requireNamespace(pkg, quietly = TRUE)) {
    install.packages(pkg)
  }
}

Load packages

library(lme4) # glmer(): binomial generalized linear mixed models
library(lmerTest) # Adds p-values for (g)lmer model summaries
library(ggplot2) # All figures below are built with ggplot2
library(wesanderson) # wes_palette(): bar fill colours
library(dplyr) # group_by()/summarize() for per-bee means
library(tidyverse) # Quality-of-life helpers (pipes etc.)
library(patchwork) # Combining panels into composite figures
library(performance) # For assumption checking
library(DHARMa) # For assumption checking

Data importing

# Read the raw data files (paths are relative to this document's folder).
PRIMEDATA <- read.csv("../Data/prime_data.csv", header = TRUE)        # Experiment 1
PRIMEGRP  <- read.csv("../Data/primegrouped_data.csv", header = TRUE) # Experiment 2 (grouped stimuli)
CONTROL   <- read.csv("../Data/control_data.csv", header = TRUE)      # Control experiment
TRAINING  <- read.csv("../Data/training_data.csv", header = TRUE)     # Training experiment

# Subset Experiment 1 data ----

# Bee identity is the random-effect grouping variable in the models below.
PRIMEDATA$BEEID <- as.factor(PRIMEDATA$BEEID)

# Split by habituation condition, then by numerical test pairing.
POSITIVE <- subset(PRIMEDATA, CONDITION == "POSITIVE")
NEGATIVE <- subset(PRIMEDATA, CONDITION == "NEGATIVE")

POS7v9   <- subset(POSITIVE, TEST == "7v9")
POS11v9  <- subset(POSITIVE, TEST == "11v9")
POS13v15 <- subset(POSITIVE, TEST == "13v15")
NEG7v9   <- subset(NEGATIVE, TEST == "7v9")
NEG11v9  <- subset(NEGATIVE, TEST == "11v9")
NEG13v15 <- subset(NEGATIVE, TEST == "13v15")

# Sanity check each subset.
summary(POS7v9)
summary(NEG7v9)
summary(POS11v9)
summary(NEG11v9)
summary(POS13v15)
summary(NEG13v15)

# Subset Experiment 2 (grouped-stimuli) data ----

# Bee identity is the random-effect grouping variable in the models below.
PRIMEGRP$BEEID <- as.factor(PRIMEGRP$BEEID)

# Split by habituation condition (coded "POS"/"NEG" in this file),
# then by test pairing; "control" is the novel-stimulus control test.
POSITIVE_GRP <- subset(PRIMEGRP, CONDITION == "POS")
NEGATIVE_GRP <- subset(PRIMEGRP, CONDITION == "NEG")

POS7v9_GRP   <- subset(POSITIVE_GRP, TEST == "7v9")
POS11v9_GRP  <- subset(POSITIVE_GRP, TEST == "11v9")
POS13v15_GRP <- subset(POSITIVE_GRP, TEST == "13v15")
NEG7v9_GRP   <- subset(NEGATIVE_GRP, TEST == "7v9")
NEG11v9_GRP  <- subset(NEGATIVE_GRP, TEST == "11v9")
NEG13v15_GRP <- subset(NEGATIVE_GRP, TEST == "13v15")
POSCONT_GRP  <- subset(POSITIVE_GRP, TEST == "control")
NEGCONT_GRP  <- subset(NEGATIVE_GRP, TEST == "control")

# Sanity check each subset.
summary(POS7v9_GRP)
summary(NEG7v9_GRP)
summary(POS11v9_GRP)
summary(NEG11v9_GRP)
summary(POS13v15_GRP)
summary(NEG13v15_GRP)

# Subset control-experiment data ----

# Bee identity is the random-effect grouping variable in the models below.
CONTROL$BEEID <- as.factor(CONTROL$BEEID)

# Split by stimulus condition: SA = equal surface area, EE = equal element size.
conSA <- subset(CONTROL, CONDITION == "SA")
conEE <- subset(CONTROL, CONDITION == "EE")

# One subset per quantity-comparison test within each stimulus condition.
EEcon3v1 <- subset(conEE, TEST == "3v1")
EEcon3v2 <- subset(conEE, TEST == "3v2")
EEcon3v4 <- subset(conEE, TEST == "3v4")
EEcon3v5 <- subset(conEE, TEST == "3v5")

SAcon3v1 <- subset(conSA, TEST == "3v1")
SAcon3v2 <- subset(conSA, TEST == "3v2")
SAcon3v4 <- subset(conSA, TEST == "3v4")
SAcon3v5 <- subset(conSA, TEST == "3v5")

# Sanity check each subset.
summary(EEcon3v1)
summary(EEcon3v2)
summary(EEcon3v4)
summary(EEcon3v5)
summary(SAcon3v1)
summary(SAcon3v2)
summary(SAcon3v4)
summary(SAcon3v5)

#Subset training data

TRAINING$BEEID <- as.factor(TRAINING$BEEID) # Treat Bee ID as factor
TRAINING$CONDITION <- as.factor(TRAINING$CONDITION)
summary(TRAINING)


# Learning-test trials only (CONDITION == "Learning").
Training_learningtest <- subset(TRAINING, CONDITION == "Learning")
# NOTE(review): the two subsets below are taken from the full TRAINING data,
# not from Training_learningtest, so they also include 7v9 and 7v8 trials.
# The variable names suggest learning-test data only — confirm this is the
# intended behaviour before reuse.
Training_learning_EE <- subset(TRAINING, STIMULI == "EE")
Training_learning_SA <- subset(TRAINING, STIMULI == "SA")

# Transfer-test trials: 7 vs 9 comparison.
Training_7v9test <- subset(TRAINING, CONDITION == "7v9") 


# Transfer-test trials: 7 vs 8 comparison.
Training_7v8test <-subset(TRAINING, CONDITION == "7v8")  


# Sanity check the full data and the stimulus-condition subsets.
summary(TRAINING)
summary(Training_learning_EE)
summary(Training_learning_SA)

Generalized linear models

Experiment 1

# Experiment 1: one intercept-only binomial GLMM per condition x test,
# with a random intercept per bee. A non-significant intercept means the
# proportion of choices does not differ from chance (50%).
lm1 <- glmer(CHOICE ~ 1 + (1 | BEEID), family = binomial, data = POS7v9)
summary(lm1)
lm2 <- glmer(CHOICE ~ 1 + (1 | BEEID), family = binomial, data = POS11v9)
summary(lm2)
lm3 <- glmer(CHOICE ~ 1 + (1 | BEEID), family = binomial, data = POS13v15)
summary(lm3)
lm4 <- glmer(CHOICE ~ 1 + (1 | BEEID), family = binomial, data = NEG7v9)
summary(lm4)
lm5 <- glmer(CHOICE ~ 1 + (1 | BEEID), family = binomial, data = NEG11v9)
summary(lm5)
lm6 <- glmer(CHOICE ~ 1 + (1 | BEEID), family = binomial, data = NEG13v15)
summary(lm6)

Pooled dataset

We use and report models fitted to a pooled dataset (combining the equal element size and equal surface area groups), as we found no statistically significant result in either condition analysed separately.

# Pool both stimulus-control groups (EE and SA), keeping one subset per test.
EXP1POOLED7v9   <- subset(PRIMEDATA, TEST == "7v9")
EXP1POOLED11v9  <- subset(PRIMEDATA, TEST == "11v9")
EXP1POOLED13v15 <- subset(PRIMEDATA, TEST == "13v15")

# Pooled 7 vs 9 test: intercept-only binomial GLMM, random intercept per bee.
# A non-significant intercept indicates chance-level (50%) choice.
lm7 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = EXP1POOLED7v9) 
summary(lm7)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: EXP1POOLED7v9
## 
##      AIC      BIC   logLik deviance df.resid 
##    272.6    279.1   -134.3    268.6      198 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.4814 -0.9016  0.5082  0.8697  1.6297 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 0.6152   0.7843  
## Number of obs: 200, groups:  BEEID, 20
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)
## (Intercept)  0.09066    0.23178   0.391    0.696
# Pooled 11 vs 9 test: same model structure.
lm8 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = EXP1POOLED11v9) 
summary(lm8)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: EXP1POOLED11v9
## 
##      AIC      BIC   logLik deviance df.resid 
##    279.2    285.8   -137.6    275.2      198 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.1446 -1.0988  0.8737  0.9101  0.9480 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 0.0292   0.1709  
## Number of obs: 200, groups:  BEEID, 20
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)
## (Intercept)   0.2021     0.1478   1.367    0.172
# Pooled 13 vs 15 test: same model structure. The singular fit reported
# below reflects a random-effect variance estimated at zero (no detectable
# between-bee variation), which is benign for an intercept-only model.
lm9 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = EXP1POOLED13v15) 
summary(lm9)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: EXP1POOLED13v15
## 
##      AIC      BIC   logLik deviance df.resid 
##    281.2    287.8   -138.6    277.2      198 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -0.9802 -0.9802 -0.9802  1.0202  1.0202 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 0        0       
## Number of obs: 200, groups:  BEEID, 20
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)
## (Intercept) -0.04001    0.14145  -0.283    0.777
## optimizer (Nelder_Mead) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')

Confidence intervals

# Back-transform the 95% profile CIs for each intercept from the logit
# scale to choice proportions. plogis(x) is exactly exp(x) / (1 + exp(x)).
# The bounds were previously hand-copied from the confint() printout;
# reading them from the returned object removes transcription-error risk
# and keeps full precision. print() preserves the table in the output.
ci_exp1_7v9 <- confint(lm7)
print(ci_exp1_7v9)
EXP1_7v9confintlow    <- plogis(ci_exp1_7v9["(Intercept)", 1])
EXP1_7v9confinthigh   <- plogis(ci_exp1_7v9["(Intercept)", 2])
ci_exp1_11v9 <- confint(lm8)
print(ci_exp1_11v9)
EXP1_11v9confintlow   <- plogis(ci_exp1_11v9["(Intercept)", 1])
EXP1_11v9confinthigh  <- plogis(ci_exp1_11v9["(Intercept)", 2])
ci_exp1_13v15 <- confint(lm9)
print(ci_exp1_13v15)
EXP1_13v15confintlow  <- plogis(ci_exp1_13v15["(Intercept)", 1])
EXP1_13v15confinthigh <- plogis(ci_exp1_13v15["(Intercept)", 2])

Experiment 2

# Experiment 2: intercept-only binomial GLMMs (random intercept per bee),
# fitted separately for each test within each habituation condition;
# "CONT" models are the novel-stimulus control tests.
lm10 <- glmer(CHOICE ~ 1 + (1 | BEEID), family = binomial, data = POS7v9_GRP)
summary(lm10)
lm11 <- glmer(CHOICE ~ 1 + (1 | BEEID), family = binomial, data = POS11v9_GRP)
summary(lm11)
lm12 <- glmer(CHOICE ~ 1 + (1 | BEEID), family = binomial, data = POS13v15_GRP)
summary(lm12)
lm13 <- glmer(CHOICE ~ 1 + (1 | BEEID), family = binomial, data = POSCONT_GRP)
summary(lm13)

lm14 <- glmer(CHOICE ~ 1 + (1 | BEEID), family = binomial, data = NEG7v9_GRP)
summary(lm14)
lm15 <- glmer(CHOICE ~ 1 + (1 | BEEID), family = binomial, data = NEG11v9_GRP)
summary(lm15)
lm16 <- glmer(CHOICE ~ 1 + (1 | BEEID), family = binomial, data = NEG13v15_GRP)
summary(lm16)
lm17 <- glmer(CHOICE ~ 1 + (1 | BEEID), family = binomial, data = NEGCONT_GRP)
summary(lm17)

Pooled dataset

We use and report models fitted to a pooled dataset (combining the equal element size and equal surface area groups), as we found no statistically significant result in either condition analysed separately.

# Pool both stimulus-control groups (EE and SA), keeping one subset per test.
EXP2POOLED7v9     <- subset(PRIMEGRP, TEST == "7v9")
EXP2POOLED11v9    <- subset(PRIMEGRP, TEST == "11v9")
EXP2POOLED13v15   <- subset(PRIMEGRP, TEST == "13v15")
EXP2POOLEDNEGCONT <- subset(PRIMEGRP, TEST == "control")

# Pooled 7 vs 9 test: intercept-only binomial GLMM, random intercept per bee.
# Singular fits below reflect random-effect variances estimated at (or
# numerically indistinguishable from) zero — benign for intercept-only models.
lm18 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = EXP2POOLED7v9) 
summary(lm18)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: EXP2POOLED7v9
## 
##      AIC      BIC   logLik deviance df.resid 
##    278.4    285.0   -137.2    274.4      198 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.1281 -1.1281  0.8864  0.8864  0.8864 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 1e-14    1e-07   
## Number of obs: 200, groups:  BEEID, 20
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)  
## (Intercept)   0.2412     0.1425   1.693   0.0905 .
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (Nelder_Mead) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Pooled 11 vs 9 test: same model structure.
lm19 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = EXP2POOLED11v9) 
summary(lm19)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: EXP2POOLED11v9
## 
##      AIC      BIC   logLik deviance df.resid 
##    280.9    287.5   -138.5    276.9      198 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.0408 -1.0408  0.9608  0.9608  0.9608 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 0        0       
## Number of obs: 200, groups:  BEEID, 20
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)
## (Intercept)  0.08004    0.14153   0.566    0.572
## optimizer (Nelder_Mead) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Pooled 13 vs 15 test: same model structure.
lm20 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = EXP2POOLED13v15) 
summary(lm20)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: EXP2POOLED13v15
## 
##      AIC      BIC   logLik deviance df.resid 
##    279.3    285.9   -137.6    275.3      198 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -0.9045 -0.9045 -0.9045  1.1055  1.1055 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 0        0       
## Number of obs: 200, groups:  BEEID, 20
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)
## (Intercept)  -0.2007     0.1421  -1.412    0.158
## optimizer (Nelder_Mead) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Pooled control test: same model structure.
lm21<-glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = EXP2POOLEDNEGCONT) 
summary(lm21)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: EXP2POOLEDNEGCONT
## 
##      AIC      BIC   logLik deviance df.resid 
##    280.8    287.4   -138.4    276.8      198 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -0.9512 -0.9512 -0.9512  1.0513  1.0513 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 0        0       
## Number of obs: 200, groups:  BEEID, 20
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)
## (Intercept)  -0.1001     0.1416  -0.707     0.48
## optimizer (Nelder_Mead) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')

Confidence intervals

# Back-transform the 95% profile CIs for each intercept from the logit
# scale to choice proportions (plogis(x) == exp(x) / (1 + exp(x))).
# Bounds are read from the confint() object instead of being hand-copied
# from the printout; print() preserves the table in the rendered output.
ci_exp2_7v9 <- confint(lm18)
print(ci_exp2_7v9)
EXP2_7v9confintlow      <- plogis(ci_exp2_7v9["(Intercept)", 1])
EXP2_7v9confinthigh     <- plogis(ci_exp2_7v9["(Intercept)", 2])
ci_exp2_11v9 <- confint(lm19)
print(ci_exp2_11v9)
EXP2_11v9confintlow     <- plogis(ci_exp2_11v9["(Intercept)", 1])
EXP2_11v9confinthigh    <- plogis(ci_exp2_11v9["(Intercept)", 2])
ci_exp2_13v15 <- confint(lm20)
print(ci_exp2_13v15)
EXP2_13v15confintlow    <- plogis(ci_exp2_13v15["(Intercept)", 1])
EXP2_13v15confinthigh   <- plogis(ci_exp2_13v15["(Intercept)", 2])
ci_exp2_cont <- confint(lm21)
print(ci_exp2_cont)
EXP2_NEGCONTconfintlow  <- plogis(ci_exp2_cont["(Intercept)", 1])
EXP2_NEGCONTconfinthigh <- plogis(ci_exp2_cont["(Intercept)", 2])

Control experiment

# Control experiment: intercept-only binomial GLMMs, one per stimulus
# condition (EE / SA) x test. Random intercept per bee; a significant
# intercept means choices differ from chance. Singular fits reflect
# random-effect variances estimated at zero and are benign here.
# EE condition, 3 vs 1 test.
clm1 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = EEcon3v1) 
summary(clm1)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: EEcon3v1
## 
##      AIC      BIC   logLik deviance df.resid 
##    133.5    138.7    -64.7    129.5       98 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.3628 -1.3628  0.7338  0.7338  0.7338 
## 
## Random effects:
##  Groups Name        Variance  Std.Dev. 
##  BEEID  (Intercept) 2.973e-14 1.724e-07
## Number of obs: 100, groups:  BEEID, 10
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)   
## (Intercept)   0.6190     0.2097   2.953  0.00315 **
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## optimizer (Nelder_Mead) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# EE condition, 3 vs 2 test.
clm2 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = EEcon3v2) 
summary(clm2)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: EEcon3v2
## 
##      AIC      BIC   logLik deviance df.resid 
##    141.7    146.9    -68.8    137.7       98 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.3509 -0.9444  0.7402  0.9186  1.2215 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 0.22     0.4691  
## Number of obs: 100, groups:  BEEID, 10
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)
## (Intercept)   0.0428     0.2534   0.169    0.866
# EE condition, 3 vs 4 test.
clm3 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = EEcon3v4) 
summary(clm3)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: EEcon3v4
## 
##      AIC      BIC   logLik deviance df.resid 
##    139.1    144.4    -67.6    135.1       98 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.4482 -0.9708  0.5557  0.8680  1.1365 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 0.3844   0.62    
## Number of obs: 100, groups:  BEEID, 10
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)
## (Intercept)   0.2684     0.2890   0.929    0.353
# EE condition, 3 vs 5 test.
clm4 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = EEcon3v5) 
summary(clm4)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: EEcon3v5
## 
##      AIC      BIC   logLik deviance df.resid 
##    142.2    147.4    -69.1    138.2       98 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.2368 -0.9871  0.8085  0.9259  1.0596 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 0.116    0.3405  
## Number of obs: 100, groups:  BEEID, 10
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)
## (Intercept)  0.08259    0.22991   0.359    0.719
# SA condition, 3 vs 1 test.
clm5 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = SAcon3v1) 
summary(clm5)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: SAcon3v1
## 
##      AIC      BIC   logLik deviance df.resid 
##    140.6    145.8    -68.3    136.6       98 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.2277 -1.1319  0.8145  0.8599  0.9325 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 0.06237  0.2497  
## Number of obs: 100, groups:  BEEID, 10
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)
## (Intercept)   0.2862     0.2188   1.308    0.191
# SA condition, 3 vs 2 test.
clm6 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = SAcon3v2) 
summary(clm6)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: SAcon3v2
## 
##      AIC      BIC   logLik deviance df.resid 
##    137.9    143.1    -66.9    133.9       98 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.6973 -0.8924  0.5892  0.8754  1.4470 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 0.6397   0.7998  
## Number of obs: 100, groups:  BEEID, 10
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)
## (Intercept)  0.04928    0.33183   0.149    0.882
# SA condition, 3 vs 4 test.
clm7 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = SAcon3v4) 
summary(clm7)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: SAcon3v4
## 
##      AIC      BIC   logLik deviance df.resid 
##    142.0    147.2    -69.0    138.0       98 
## 
## Scaled residuals: 
##    Min     1Q Median     3Q    Max 
## -1.083 -1.083  0.923  0.923  0.923 
## 
## Random effects:
##  Groups Name        Variance  Std.Dev. 
##  BEEID  (Intercept) 5.429e-15 7.368e-08
## Number of obs: 100, groups:  BEEID, 10
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)
## (Intercept)   0.1603     0.2006   0.799    0.424
## optimizer (Nelder_Mead) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# SA condition, 3 vs 5 test.
clm8 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = SAcon3v5) 
summary(clm8)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: SAcon3v5
## 
##      AIC      BIC   logLik deviance df.resid 
##    141.9    147.1    -68.9    137.9       98 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.0077 -0.9076 -0.8613  1.0733  1.1610 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 0.06011  0.2452  
## Number of obs: 100, groups:  BEEID, 10
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)
## (Intercept)  -0.1627     0.2167  -0.751    0.453

Confidence intervals

# Back-transform the 95% profile CIs for each intercept from the logit
# scale to choice proportions (plogis(x) == exp(x) / (1 + exp(x))).
# Bounds are read from the confint() object instead of being hand-copied
# from the printout; print() preserves the table in the rendered output.
ci_EE3v1 <- confint(clm1)
print(ci_EE3v1)
EE3v1confintlow  <- plogis(ci_EE3v1["(Intercept)", 1])
EE3v1confinthigh <- plogis(ci_EE3v1["(Intercept)", 2])
ci_EE3v2 <- confint(clm2)
print(ci_EE3v2)
EE3v2confintlow  <- plogis(ci_EE3v2["(Intercept)", 1])
EE3v2confinthigh <- plogis(ci_EE3v2["(Intercept)", 2])
ci_EE3v4 <- confint(clm3)
print(ci_EE3v4)
EE3v4confintlow  <- plogis(ci_EE3v4["(Intercept)", 1])
EE3v4confinthigh <- plogis(ci_EE3v4["(Intercept)", 2])
ci_EE3v5 <- confint(clm4)
print(ci_EE3v5)
EE3v5confintlow  <- plogis(ci_EE3v5["(Intercept)", 1])
EE3v5confinthigh <- plogis(ci_EE3v5["(Intercept)", 2])

ci_SA3v1 <- confint(clm5)
print(ci_SA3v1)
SA3v1confintlow  <- plogis(ci_SA3v1["(Intercept)", 1])
SA3v1confinthigh <- plogis(ci_SA3v1["(Intercept)", 2])
ci_SA3v2 <- confint(clm6)
print(ci_SA3v2)
SA3v2confintlow  <- plogis(ci_SA3v2["(Intercept)", 1])
SA3v2confinthigh <- plogis(ci_SA3v2["(Intercept)", 2])
ci_SA3v4 <- confint(clm7)
print(ci_SA3v4)
SA3v4confintlow  <- plogis(ci_SA3v4["(Intercept)", 1])
SA3v4confinthigh <- plogis(ci_SA3v4["(Intercept)", 2])
ci_SA3v5 <- confint(clm8)
print(ci_SA3v5)
SA3v5confintlow  <- plogis(ci_SA3v5["(Intercept)", 1])
SA3v5confinthigh <- plogis(ci_SA3v5["(Intercept)", 2])

Training experiment

# Training experiment: intercept-only binomial GLMMs, random intercept
# per bee. Singular fits reflect random-effect variances estimated at
# (or numerically indistinguishable from) zero — benign for these models.
# Learning test (all stimulus conditions pooled).
tlm1 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = Training_learningtest) 
summary(tlm1)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: Training_learningtest
## 
##      AIC      BIC   logLik deviance df.resid 
##    225.8    232.0   -110.9    221.8      158 
## 
## Scaled residuals: 
##    Min     1Q Median     3Q    Max 
##     -1     -1      0      1      1 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 8.1e-15  9e-08   
## Number of obs: 160, groups:  BEEID, 16
## 
## Fixed effects:
##               Estimate Std. Error z value Pr(>|z|)
## (Intercept) -3.053e-17  1.581e-01       0        1
## optimizer (Nelder_Mead) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Transfer test: 7 vs 9.
tlm2 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = Training_7v9test) 
summary(tlm2)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: Training_7v9test
## 
##      AIC      BIC   logLik deviance df.resid 
##    224.2    230.4   -110.1    220.2      158 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.1055 -1.1055  0.9045  0.9045  0.9045 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 4e-14    2e-07   
## Number of obs: 160, groups:  BEEID, 16
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)
## (Intercept)   0.2007     0.1589   1.263    0.207
## optimizer (Nelder_Mead) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Transfer test: 7 vs 8.
tlm3 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = Training_7v8test) 
summary(tlm3)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ 1 + (1 | BEEID)
##    Data: Training_7v8test
## 
##      AIC      BIC   logLik deviance df.resid 
##    224.6    230.7   -110.3    220.6      158 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.2762 -1.0006  0.7836  0.9522  1.0490 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 0.1278   0.3575  
## Number of obs: 160, groups:  BEEID, 16
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)
## (Intercept)   0.1294     0.1842   0.702    0.483
# Learning test with stimulus condition (EE vs SA) as a fixed effect:
# tests whether performance differed between the two stimulus sets.
tlm4 <- glmer(CHOICE~ STIMULI + (1|BEEID), family = binomial, data = Training_learningtest) 
summary(tlm4)
## Generalized linear mixed model fit by maximum likelihood (Laplace
##   Approximation) [glmerMod]
##  Family: binomial  ( logit )
## Formula: CHOICE ~ STIMULI + (1 | BEEID)
##    Data: Training_learningtest
## 
##      AIC      BIC   logLik deviance df.resid 
##    227.7    236.9   -110.9    221.7      157 
## 
## Scaled residuals: 
##     Min      1Q  Median      3Q     Max 
## -1.0253 -0.9753  0.0000  0.9753  1.0253 
## 
## Random effects:
##  Groups Name        Variance Std.Dev.
##  BEEID  (Intercept) 4e-14    2e-07   
## Number of obs: 160, groups:  BEEID, 16
## 
## Fixed effects:
##             Estimate Std. Error z value Pr(>|z|)
## (Intercept)  0.05001    0.22368   0.224    0.823
## STIMULISA   -0.10002    0.31633  -0.316    0.752
## 
## Correlation of Fixed Effects:
##           (Intr)
## STIMULISA -0.707
## optimizer (Nelder_Mead) convergence code: 0 (OK)
## boundary (singular) fit: see help('isSingular')
# Per-stimulus-condition models (summaries not shown; fitted only to
# extract confidence intervals below).
tlm5 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = Training_learning_EE) 
tlm6 <- glmer(CHOICE~ 1 + (1|BEEID), family = binomial, data = Training_learning_SA) 

Confidence intervals

# Back-transform the 95% profile CIs from the logit scale to proportions
# (plogis(x) == exp(x) / (1 + exp(x))). Bounds are read from the
# confint() object instead of being hand-copied from the printout;
# print() preserves each table in the rendered output.
ci_tlm1 <- confint(tlm1)
print(ci_tlm1)
traininglearningconfintlow  <- plogis(ci_tlm1["(Intercept)", 1])
traininglearningconfinthigh <- plogis(ci_tlm1["(Intercept)", 2])
ci_tlm2 <- confint(tlm2)
print(ci_tlm2)
training7v9confintlow  <- plogis(ci_tlm2["(Intercept)", 1])
training7v9confinthigh <- plogis(ci_tlm2["(Intercept)", 2])
ci_tlm3 <- confint(tlm3)
print(ci_tlm3)
training7v8confintlow  <- plogis(ci_tlm3["(Intercept)", 1])
training7v8confinthigh <- plogis(ci_tlm3["(Intercept)", 2])
ci_tlm4 <- confint(tlm4)
print(ci_tlm4)
# NOTE: these bounds are for the STIMULISA coefficient (the EE-vs-SA
# difference on the logit scale), matching the values used originally.
training_stim_confintlow  <- plogis(ci_tlm4["STIMULISA", 1])
training_stim_confinthigh <- plogis(ci_tlm4["STIMULISA", 2])

ci_tlm5 <- confint(tlm5)
print(ci_tlm5)
training_EE_confintlow  <- plogis(ci_tlm5["(Intercept)", 1])
training_EE_confinthigh <- plogis(ci_tlm5["(Intercept)", 2])
ci_tlm6 <- confint(tlm6)
print(ci_tlm6)
training_SA_confintlow  <- plogis(ci_tlm6["(Intercept)", 1])
training_SA_confinthigh <- plogis(ci_tlm6["(Intercept)", 2])

Figures

# Create dataframe for experiment 1 bargraph.
# Bar heights are the overall mean proportions of choices per test
# (hard-coded from the raw data); error bars use the back-transformed
# model CIs computed above. The original wrapped the Test vector in
# rep() with no `times` argument, which is a no-op — removed.
dframe <- data.frame(
  Test        = c("7v9", "11v9", "13v15"),
  percentage  = c(0.52, 0.55, 0.49),
  confintlow  = c(EXP1_7v9confintlow, EXP1_11v9confintlow, EXP1_13v15confintlow),
  confinthigh = c(EXP1_7v9confinthigh, EXP1_11v9confinthigh, EXP1_13v15confinthigh)
)


# Per-bee mean choice proportion for each test/condition, plotted as
# jittered points over the bars.
exp1points <- PRIMEDATA %>%
  group_by(BEEID, TEST, CONDITION) %>% 
  summarize(prop = mean(CHOICE)) 


# Create dataframe for experiment 2 bargraph.
# Same structure as experiment 1, plus the control test. The no-op
# rep() wrapper around the Test vector has been removed.
dframe2 <- data.frame(
  Test        = c("7v9", "11v9", "13v15", "control"),
  percentage  = c(0.56, 0.52, 0.45, 0.475),
  confintlow  = c(EXP2_7v9confintlow, EXP2_11v9confintlow, EXP2_13v15confintlow, EXP2_NEGCONTconfintlow),
  confinthigh = c(EXP2_7v9confinthigh, EXP2_11v9confinthigh, EXP2_13v15confinthigh, EXP2_NEGCONTconfinthigh)
)

# Per-bee mean choice proportion for each test/condition.
exp2points <- PRIMEGRP %>%
  group_by(BEEID, TEST, CONDITION) %>% 
  summarize(prop = mean(CHOICE))


# Create dataframe for the control-experiment bargraph: one row per
# test x stimulus condition (EE/SA), interleaved so each test has its
# EE value followed by its SA value.
dframecon <- data.frame(
  Test        = rep(c("3v1", "3v2", "3v4", "3v5"), each = 2),
  Condition   = rep(c("EE", "SA"), times = 4),
  percentage  = c(0.6889, 0.57, 0.5, 0.51, 0.5111, 0.54, 0.5444, 0.46),
  confintlow  = c(EE3v1confintlow, SA3v1confintlow, EE3v2confintlow, SA3v2confintlow,
                  EE3v4confintlow, SA3v4confintlow, EE3v5confintlow, SA3v5confintlow),
  confinthigh = c(EE3v1confinthigh, SA3v1confinthigh, EE3v2confinthigh, SA3v2confinthigh,
                  EE3v4confinthigh, SA3v4confinthigh, EE3v5confinthigh, SA3v5confinthigh)
)


# Per-bee mean choice proportion for each test/condition.
controlpoints <- CONTROL %>%
  group_by(BEEID, TEST, CONDITION) %>%
  summarize(prop = mean(CHOICE))


# Create dataframe for the training-experiment bargraph. The no-op
# rep() wrapper around the Test vector has been removed.
dframetrn <- data.frame(
  Test        = c("Learning", "7v9", "7v8"),
  percentage  = c(0.5, 0.55, 0.53),
  confintlow  = c(traininglearningconfintlow, training7v9confintlow, training7v8confintlow),
  confinthigh = c(traininglearningconfinthigh, training7v9confinthigh, training7v8confinthigh)
)

# Per-bee mean choice proportion for each test condition.
trainingpoints <- TRAINING %>%
  group_by(BEEID, CONDITION) %>% 
  summarize(prop = mean(CHOICE))

# Reorder factor levels so the x-axis of the training figure reads
# Learning, 7v9, 7v8 (chronological test order) rather than alphabetical.
levels(trainingpoints$CONDITION) # view the existing order of factors in the trainingpoints
## [1] "7v8"      "7v9"      "Learning"
trainingpoints$CONDITION <- factor(trainingpoints$CONDITION, levels=c('Learning', '7v9', '7v8')) # set the order of your factors as you wish
levels(trainingpoints$CONDITION) # check to view the change
## [1] "Learning" "7v9"      "7v8"
# Apply the same ordering to the bar-graph dataframe so bars and points align.
dframetrn$Test <- factor(dframetrn$Test, levels = c('Learning', '7v9', '7v8')) 
dframetrn$Test # check to view the change
## [1] Learning 7v9      7v8     
## Levels: Learning 7v9 7v8
# Create dataframe for supplementary figure comparing performance in EE and SA.
# The no-op rep() wrapper around the Stimuli vector has been removed.
dframetrn2 <- data.frame(
  Stimuli     = c("EE", "SA"),
  percentage  = c(0.55, 0.51),
  confintlow  = c(training_EE_confintlow, training_SA_confintlow),
  confinthigh = c(training_EE_confinthigh, training_SA_confinthigh)
)

# Per-bee mean choice proportion within each stimulus condition.
trainingpoints2 <- Training_learningtest %>%
  group_by(BEEID, STIMULI) %>% 
  summarize(prop = mean(CHOICE))

              
# Generate bargraph for experiment 1 

exp1graph <- ggplot()+
  labs (x= "Test condition", y = "Mean percentage of choices") +
  geom_bar(data = dframe, aes(x = Test, y = percentage, fill = Test), stat = "identity", position = "dodge") + 
   geom_point(data = exp1points, aes(x = TEST, y = prop, col = BEEID), position = position_jitter(width=0.15, height=0.01), size =1) +
  geom_errorbar(data= dframe, aes(x = Test, y = percentage, ymin = confintlow, ymax = confinthigh), width=.2, position=position_dodge(.9)) +
  geom_hline(yintercept = 0.5, linetype = "dashed", colour = " black") +
  scale_fill_manual(values = wes_palette("GrandBudapest2", n = 3)) +
  scale_y_continuous(labels = scales::percent_format(accuracy = 1, suffix = NULL)) +
  scale_x_discrete(labels=c("7v9" = "7 vs 9", "11v9" = "11 vs 9", "13v15" = "13 vs 15"))+
  theme(panel.background = element_blank(),
        axis.line = element_line(colour = "black"),
        aspect.ratio = 1/1, 
        plot.margin = margin(0.3,0.3,0.3,0.3, "cm"),
        axis.title.x = element_text(vjust = -3)) + theme(legend.position = "none")
print(exp1graph)

# Generate bargraph for experiment 2 (same layout as experiment 1, with
# the added control test; y-axis title suppressed because this panel sits
# next to the experiment 1 panel in the composite figure).
exp2graph <- ggplot() +
  labs(x = "Test condition", y = NULL) +  # Set y-axis title to NULL
  geom_bar(data = dframe2, aes(x = Test, y = percentage, fill = Test),
           stat = "identity", position = "dodge") +
  geom_point(data = exp2points, aes(x = TEST, y = prop, col = BEEID),
             position = position_jitter(width = 0.15, height = 0.01), size = 1) +
  geom_errorbar(data = dframe2,
                aes(x = Test, y = percentage, ymin = confintlow, ymax = confinthigh),
                width = .2, position = position_dodge(.9)) +
  geom_hline(yintercept = 0.5, linetype = "dashed", colour = "black") +
  scale_fill_manual(values = wes_palette("GrandBudapest2", n = 4)) +
  scale_y_continuous(labels = scales::percent_format(accuracy = 1, suffix = NULL)) +
  scale_x_discrete(labels = c("7v9" = "7 vs 9", "11v9" = "11 vs 9",
                              "13v15" = "13 vs 15", "control" = "control")) +
  theme(panel.background = element_blank(),
        axis.line = element_line(colour = "black"),
        aspect.ratio = 1 / 1,
        plot.margin = margin(0.3, 0.3, 0.3, 0.3, "cm"),
        axis.title.y = element_blank(),  # Hide y-axis title
        axis.title.x = element_text(vjust = -3),
        legend.position = "none")

print(exp2graph)

# Generate bargraph for the control experiment: EE and SA bars side by
# side for each test, with per-bee points, model CIs, the 50% chance
# line, and a significance star over the EE 3v1 bar.

# NOTE(review): custom_shapes is not used anywhere in the code shown;
# retained in case code outside this view references it.
custom_shapes <- c(21, 22, 23, 24, 25, 21, 22, 23, 24, 25)

congraph <- ggplot() +
  labs(x = "Test condition", y = "Mean percentage of choices") +
  geom_bar(data = dframecon, aes(x = Test, y = percentage, fill = Condition),
           stat = "identity", position = "dodge") +
  # Fixed: colour was " black" — the stray leading space is not a valid
  # R colour name.
  geom_hline(yintercept = 0.5, linetype = "dashed", colour = "black") +
  geom_point(data = controlpoints, aes(x = TEST, y = prop, fill = CONDITION),
             position = position_jitterdodge(jitter.width = 0.15, jitter.height = 0.01,
                                             dodge.width = 0.9),
             show.legend = FALSE, size = 1, colour = "#696969") +
  scale_fill_manual(values = wes_palette("GrandBudapest2", n = 2)) +
  geom_errorbar(data = dframecon,
                aes(x = Test, y = percentage, ymin = confintlow, ymax = confinthigh,
                    group = Condition),
                width = .15, position = position_dodge(.9)) +
  # annotate() draws the star exactly once; the original
  # geom_text(data = dframecon, ...) overplotted the same label once per
  # row of dframecon (8 times at the same position).
  annotate("text", x = 0.775, y = 0.83, label = "*", size = 6) +
  scale_y_continuous(labels = scales::percent_format(accuracy = 1, suffix = NULL)) +
  theme(panel.background = element_blank(),
        axis.line = element_line(colour = "black"),
        aspect.ratio = 1/1,
        axis.title.x = element_text(vjust = -3),
        plot.margin = margin(0.3, 0.3, 0.3, 0.3, "cm"),
        legend.position = "none")
print(congraph)

# Generate bargraph for training experiment

trngraph <- ggplot() +
  labs(x = "Test condition", y = "Mean percentage of choices") +
  # Mean proportion of choices per test condition
  geom_bar(data = dframetrn, aes(x = Test, y = percentage, fill = Test), stat = "identity", position = "dodge") +
  # Per-bee proportions, jittered to reduce overplotting
  geom_point(data = trainingpoints, aes(x = CONDITION, y = prop, col = BEEID), position = position_jitter(width = 0.15, height = 0.01), size = 1) +
  # 95 % confidence intervals around each bar
  geom_errorbar(data = dframetrn, aes(x = Test, y = percentage, ymin = confintlow, ymax = confinthigh), width = .2, position = position_dodge(.9)) +
  # Chance level (50 %); colour fixed from " black" - a leading space
  # makes the colour name invalid
  geom_hline(yintercept = 0.5, linetype = "dashed", colour = "black") +
  scale_fill_manual(values = wes_palette("GrandBudapest2", n = 3)) +
  scale_y_continuous(labels = scales::percent_format(accuracy = 1, suffix = NULL), limits = c(0, 1)) +
  theme(panel.background = element_blank(),
        axis.line = element_line(colour = "black"),
        aspect.ratio = 1 / 1,
        axis.title.y = element_blank(),  # y title supplied by the left panel of the combined figure
        plot.margin = margin(0.3, 0.3, 0.3, 0.3, "cm"),
        axis.title.x = element_text(vjust = -3),
        legend.position = "none")
print(trngraph)

# Generate bargraph comparing EE and SA performance in the learning test

trngraph2 <- ggplot() +
  labs(x = "Stimulus condition", y = "Mean percentage of correct choices") +
  # Mean proportion of correct choices per stimulus condition (EE / SA)
  geom_bar(data = dframetrn2, aes(x = Stimuli, y = percentage, fill = Stimuli), stat = "identity", position = "dodge") +
  # Per-bee proportions, jittered to reduce overplotting
  geom_point(data = trainingpoints2, aes(x = STIMULI, y = prop, col = BEEID), position = position_jitter(width = 0.15, height = 0.01), size = 1) +
  # 95 % confidence intervals around each bar
  geom_errorbar(data = dframetrn2, aes(x = Stimuli, y = percentage, ymin = confintlow, ymax = confinthigh), width = .2, position = position_dodge(.9)) +
  # Chance level (50 %); colour fixed from " black" - a leading space
  # makes the colour name invalid
  geom_hline(yintercept = 0.5, linetype = "dashed", colour = "black") +
  scale_fill_manual(values = wes_palette("GrandBudapest2", n = 2)) +
  scale_y_continuous(labels = scales::percent_format(accuracy = 1, suffix = NULL), limits = c(0, 1)) +
  # Single consolidated theme(); the original spread these settings over
  # three theme() calls and set axis.title.y twice (the later call won)
  theme(panel.background = element_blank(),
        axis.line = element_line(colour = "black"),
        aspect.ratio = 1 / 1,
        plot.margin = margin(0.6, 0.6, 0.6, 0.6, "cm"),
        axis.title = element_text(size = 15),
        axis.text.x = element_text(size = 15),
        axis.text.y = element_text(size = 15),
        axis.title.x = element_text(vjust = -3),
        axis.title.y = element_text(vjust = 5, size = 15),
        legend.position = "none")
print(trngraph2)

Generate Figure x

# Combine the Experiment 1 and Experiment 2 panels side by side
# (patchwork) and tag them "a"/"b".
figx <- exp1graph + exp2graph
figx_annotated <- figx + plot_annotation(tag_levels = "a") &
  theme(plot.tag.position = c(-0.05, 1), plot.tag = element_text(size = 16, face = "bold"))
figx_annotated  # display the combined figure

# Pass the plot explicitly rather than relying on ggsave()'s
# last-displayed-plot default; device given as a name rather than the
# bare grDevices function (dpi only affects the raster output)
ggsave("figx.pdf", plot = figx_annotated, device = "pdf", height = 4, width = 8.2, dpi = 300, path = "../Output")
ggsave("figx.png", plot = figx_annotated, device = "png", height = 4, width = 8.2, dpi = 300, path = "../Output")


# Combine the control and training panels side by side (patchwork) and
# tag them "a"/"b".
figx2 <- congraph + trngraph
figx2_annotated <- figx2 + plot_annotation(tag_levels = "a") &
  theme(plot.tag.position = c(-0.05, 1), plot.tag = element_text(size = 16, face = "bold"))
figx2_annotated  # display the combined figure

# Pass each plot explicitly rather than relying on ggsave()'s
# last-displayed-plot default; device given as a name rather than the
# bare grDevices function (dpi only affects the raster output)
ggsave("figx2.pdf", plot = figx2_annotated, device = "pdf", height = 4, width = 8.2, dpi = 300, path = "../Output")
ggsave("figx2.png", plot = figx2_annotated, device = "png", height = 4, width = 8.2, dpi = 300, path = "../Output")

trngraph2  # display the supplementary EE-vs-SA figure

ggsave("figSuppx.pdf", plot = trngraph2, device = "pdf", height = 5, width = 5, dpi = 300, path = "../Output")
ggsave("figSuppx.png", plot = trngraph2, device = "png", height = 5, width = 5, dpi = 300, path = "../Output")

Checking model fit

Below, each fitted model is checked for overdispersion, summarised with performance metrics, and inspected via DHARMa simulated residual diagnostics.

# Collect every fitted model under a descriptive name so the assumption
# checks below (overdispersion, performance metrics, DHARMa residuals)
# can iterate over them by name.
models <- list(
  # Experiment 1 tests, pooled over bees
  EXP1_7v9_pooled = lm7,
  EXP1_9v11_pooled = lm8,
  EXP1_13v15_pooled = lm9,
  # Experiment 2 tests plus negative control, pooled over bees
  EXP2_7v9_pooled = lm18,
  EXP2_9v11_pooled = lm19,
  EXP2_13v15_pooled = lm20,
  EXP2_NEGCONT_pooled = lm21,
  # Control experiment, Equal element size (EE) condition
  EEcon3v1 = clm1,
  EEcon3v2 = clm2,
  EEcon3v4 = clm3,
  EEcon3v5 = clm4,
  # Control experiment, Equal surface area (SA) condition
  SAcon3v1 = clm5,
  SAcon3v2 = clm6,
  SAcon3v4 = clm7,
  SAcon3v5 = clm8,
  # Training experiment models
  training_learning = tlm1,
  training_7v9 = tlm2,
  training_7v8 = tlm3,
  training_EEvsSA_test = tlm4)
  



# Check for overdispersion: run performance::check_overdispersion() on
# every fitted model, labelling each result with the model's name.
invisible(lapply(names(models), function(nm) {
  cat(paste0("Overdispersion Check for ", nm, "\n"))
  print(check_overdispersion(models[[nm]]))
  cat("\n")
}))
## Overdispersion Check for EXP1_7v9_pooled
## # Overdispersion test
## 
##  dispersion ratio = 1.010
##           p-value =  0.56
## 
## 
## Overdispersion Check for EXP1_9v11_pooled
## # Overdispersion test
## 
##  dispersion ratio = 1.007
##           p-value =     1
## 
## 
## Overdispersion Check for EXP1_13v15_pooled
## # Overdispersion test
## 
##  dispersion ratio = 1.006
##           p-value =   0.4
## 
## 
## Overdispersion Check for EXP2_7v9_pooled
## # Overdispersion test
## 
##  dispersion ratio = 1.007
##           p-value = 0.952
## 
## 
## Overdispersion Check for EXP2_9v11_pooled
## # Overdispersion test
## 
##  dispersion ratio = 1.006
##           p-value =  0.72
## 
## 
## Overdispersion Check for EXP2_13v15_pooled
## # Overdispersion test
## 
##  dispersion ratio = 1.007
##           p-value = 0.856
## 
## 
## Overdispersion Check for EXP2_NEGCONT_pooled
## # Overdispersion test
## 
##  dispersion ratio = 1.007
##           p-value = 0.704
## 
## 
## Overdispersion Check for EEcon3v1
## # Overdispersion test
## 
##  dispersion ratio = 1.013
##           p-value = 0.904
## 
## 
## Overdispersion Check for EEcon3v2
## # Overdispersion test
## 
##  dispersion ratio = 1.014
##           p-value = 0.376
## 
## 
## Overdispersion Check for EEcon3v4
## # Overdispersion test
## 
##  dispersion ratio = 1.023
##           p-value = 0.944
## 
## 
## Overdispersion Check for EEcon3v5
## # Overdispersion test
## 
##  dispersion ratio = 1.013
##           p-value = 0.504
## 
## 
## Overdispersion Check for SAcon3v1
## # Overdispersion test
## 
##  dispersion ratio = 1.016
##           p-value =  0.96
## 
## 
## Overdispersion Check for SAcon3v2
## # Overdispersion test
## 
##  dispersion ratio = 1.020
##           p-value =   0.2
## 
## 
## Overdispersion Check for SAcon3v4
## # Overdispersion test
## 
##  dispersion ratio = 1.012
##           p-value =  0.76
## 
## 
## Overdispersion Check for SAcon3v5
## # Overdispersion test
## 
##  dispersion ratio = 1.013
##           p-value = 0.968
## 
## 
## Overdispersion Check for training_learning
## # Overdispersion test
## 
##  dispersion ratio = 1.005
##           p-value = 0.192
## 
## 
## Overdispersion Check for training_7v9
## # Overdispersion test
## 
##  dispersion ratio = 1.009
##           p-value = 0.856
## 
## 
## Overdispersion Check for training_7v8
## # Overdispersion test
## 
##  dispersion ratio = 1.008
##           p-value = 0.896
## 
## 
## Overdispersion Check for training_EEvsSA_test
## # Overdispersion test
## 
##  dispersion ratio = 1.008
##           p-value = 0.344
# Compute performance metrics for each model
# (performance::model_performance returns one metrics table per model;
# the result keeps the same names and order as `models`)
performance_results <- lapply(models, model_performance)
## Random effect variances not available. Returned R2 does not account for random effects.
## Random effect variances not available. Returned R2 does not account for random effects.
## Random effect variances not available. Returned R2 does not account for random effects.
## Random effect variances not available. Returned R2 does not account for random effects.
## Random effect variances not available. Returned R2 does not account for random effects.
## Random effect variances not available. Returned R2 does not account for random effects.
## Random effect variances not available. Returned R2 does not account for random effects.
## Random effect variances not available. Returned R2 does not account for random effects.
## Random effect variances not available. Returned R2 does not account for random effects.
## Random effect variances not available. Returned R2 does not account for random effects.
# Capture and print markdown output for each model
for (nm in names(performance_results)) {
  cat(sprintf("### Performance Metrics for %s\n\n", nm))

  # Render this model's metrics table as a markdown table
  md_lines <- capture.output(
    display(performance_results[[nm]], format = "markdown", digits = 2, caption = NULL)
  )

  # Echo the rendered table to the console verbatim
  cat(paste(md_lines, collapse = "\n"))

  cat("\n\n")
}
## ### Performance Metrics for EXP1_7v9_pooled
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) |  ICC | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |272.55 | 272.61 | 279.15 |       0.16 |       0.00 | 0.16 | 0.45 |  1.00 |     0.60 |    -78.36 |        8.67e-03 |
## 
## ### Performance Metrics for EXP1_9v11_pooled
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) |      ICC | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:--------:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |279.21 | 279.27 | 285.80 |   8.80e-03 |       0.00 | 8.80e-03 | 0.49 |  1.00 |     0.68 |    -85.24 |            0.01 |
## 
## ### Performance Metrics for EXP1_13v15_pooled
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |281.18 | 281.24 | 287.78 |            |       0.00 | 0.50 |  1.00 |     0.69 |    -63.76 |            0.05 |
## 
## ### Performance Metrics for EXP2_7v9_pooled
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |278.37 | 278.43 | 284.97 |            |       0.00 | 0.50 |  1.00 |     0.69 |    -89.17 |            0.05 |
## 
## ### Performance Metrics for EXP2_9v11_pooled
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |280.94 | 281.00 | 287.54 |            |       0.00 | 0.50 |  1.00 |     0.69 |    -73.88 |            0.05 |
## 
## ### Performance Metrics for EXP2_13v15_pooled
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |279.26 | 279.32 | 285.85 |            |       0.00 | 0.50 |  1.00 |     0.69 |    -51.87 |            0.05 |
## 
## ### Performance Metrics for EXP2_NEGCONT_pooled
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |280.76 | 280.82 | 287.36 |            |       0.00 | 0.50 |  1.00 |     0.69 |    -59.10 |            0.05 |
## 
## ### Performance Metrics for EEcon3v1
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |133.49 | 133.61 | 138.70 |            |       0.00 | 0.48 |  1.00 |     0.65 |    -65.12 |            0.08 |
## 
## ### Performance Metrics for EEcon3v2
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) |  ICC | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |141.65 | 141.77 | 146.86 |       0.06 |       0.00 | 0.06 | 0.48 |  1.00 |     0.65 |    -34.84 |            0.02 |
## 
## ### Performance Metrics for EEcon3v4
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) |  ICC | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |139.15 | 139.27 | 144.36 |       0.10 |       0.00 | 0.10 | 0.46 |  1.00 |     0.62 |    -45.33 |            0.02 |
## 
## ### Performance Metrics for EEcon3v5
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) |  ICC | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |142.15 | 142.28 | 147.36 |       0.03 |       0.00 | 0.03 | 0.49 |  1.00 |     0.67 |    -36.26 |            0.03 |
## 
## ### Performance Metrics for SAcon3v1
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) |  ICC | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |140.56 | 140.69 | 145.77 |       0.02 |       0.00 | 0.02 | 0.49 |  1.00 |     0.67 |    -45.81 |            0.03 |
## 
## ### Performance Metrics for SAcon3v2
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) |  ICC | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |137.90 | 138.02 | 143.11 |       0.16 |       0.00 | 0.16 | 0.45 |  1.00 |     0.60 |    -36.61 |            0.02 |
## 
## ### Performance Metrics for SAcon3v4
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |141.99 | 142.11 | 147.20 |            |       0.00 | 0.50 |  1.00 |     0.69 |    -39.69 |            0.07 |
## 
## ### Performance Metrics for SAcon3v5
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) |  ICC | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |141.89 | 142.02 | 147.10 |       0.02 |       0.00 | 0.02 | 0.49 |  1.00 |     0.68 |    -26.64 |            0.04 |
## 
## ### Performance Metrics for training_learning
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |225.81 | 225.88 | 231.96 |            |       0.00 | 0.50 |  1.00 |     0.69 |    -53.26 |            0.06 |
## 
## ### Performance Metrics for training_7v9
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |224.20 | 224.28 | 230.35 |            |       0.00 | 0.50 |  1.00 |     0.69 |    -67.70 |            0.06 |
## 
## ### Performance Metrics for training_7v8
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) |  ICC | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |224.58 | 224.66 | 230.73 |       0.04 |       0.00 | 0.04 | 0.49 |  1.00 |     0.66 |    -62.42 |            0.02 |
## 
## ### Performance Metrics for training_EEvsSA_test
## 
## 
## 
## |AIC    |   AICc |    BIC | R2 (cond.) | R2 (marg.) | RMSE | Sigma | Log_loss | Score_log | Score_spherical |
## |:------|:------:|:------:|:----------:|:----------:|:----:|:-----:|:--------:|:---------:|:---------------:|
## |227.71 | 227.86 | 236.93 |            |   7.64e-04 | 0.50 |  1.00 |     0.69 |    -53.29 |            0.04 |
# Loop for simulating residuals: for every fitted model, simulate
# DHARMa scaled residuals and draw the standard diagnostic plot panel.
for (nm in names(models)) {
  cat(paste0("Simulated Residuals and Plot for ", nm, "\n"))
  plot(simulateResiduals(fittedModel = models[[nm]]))
  cat("\n\n")
}
## Simulated Residuals and Plot for EXP1_7v9_pooled

## 
## 
## Simulated Residuals and Plot for EXP1_9v11_pooled

## 
## 
## Simulated Residuals and Plot for EXP1_13v15_pooled

## 
## 
## Simulated Residuals and Plot for EXP2_7v9_pooled

## 
## 
## Simulated Residuals and Plot for EXP2_9v11_pooled

## 
## 
## Simulated Residuals and Plot for EXP2_13v15_pooled

## 
## 
## Simulated Residuals and Plot for EXP2_NEGCONT_pooled

## 
## 
## Simulated Residuals and Plot for EEcon3v1

## 
## 
## Simulated Residuals and Plot for EEcon3v2

## 
## 
## Simulated Residuals and Plot for EEcon3v4

## 
## 
## Simulated Residuals and Plot for EEcon3v5

## 
## 
## Simulated Residuals and Plot for SAcon3v1

## 
## 
## Simulated Residuals and Plot for SAcon3v2

## 
## 
## Simulated Residuals and Plot for SAcon3v4

## 
## 
## Simulated Residuals and Plot for SAcon3v5

## 
## 
## Simulated Residuals and Plot for training_learning

## 
## 
## Simulated Residuals and Plot for training_7v9

## 
## 
## Simulated Residuals and Plot for training_7v8

## 
## 
## Simulated Residuals and Plot for training_EEvsSA_test